
@InProceedings{MagalhãesSilGomMarSil:2020:EvEmWi,
               author = "Magalh{\~a}es, Whendell Feij{\'o} and Silva, Jeferson Ferreira 
                         da and Gomes, Herman Martins and Marinho, Leandro Balby and 
                         Silveira, Pl{\'{\i}}nio",
          affiliation = "{Federal University of Campina Grande} and {Federal University of 
                         Campina Grande} and {Federal University of Campina Grande} and 
                         {Federal University of Campina Grande} and Hewlett Packard 
                         Enterprise, Brazil",
                title = "Evaluating the Emergence of Winning Tickets by Structured Pruning 
                         of Convolutional Networks",
            booktitle = "Proceedings...",
                 year = "2020",
               editor = "Musse, Soraia Raupp and Cesar Junior, Roberto Marcondes and 
                         Pelechano, Nuria and Wang, Zhangyang (Atlas)",
         organization = "Conference on Graphics, Patterns and Images, 33. (SIBGRAPI)",
            publisher = "IEEE Computer Society",
              address = "Los Alamitos",
             keywords = "neural network compression, structured pruning, winning tickets, 
                         weight rewinding, learning rate rewinding.",
             abstract = "The recently introduced Lottery Ticket Hypothesis has created a 
                         new investigation front in neural network pruning. The hypothesis 
                         states that it is possible to find subnetworks with high 
                         generalization capabilities (winning tickets) from an 
                         over-parameterized neural network. One step of the algorithm 
                         implementing the hypothesis requires resetting the weights of the 
                         pruned network to their initial random values. More recent 
                         variations of this step may involve: (i) resetting the weights to 
                         the values they had at an early epoch of the unpruned network 
                         training, or (ii) keeping the final training weights and resetting 
                          only the learning rate schedule. Although some studies have 
                          investigated the above variations, mostly with unstructured 
                          pruning, we are not aware of existing evaluations that focus on 
                          structured pruning under local and global pruning variations. 
                         In this context, this paper presents novel empirical evidence that 
                         it is possible to obtain winning tickets when performing 
                          structured pruning of convolutional neural networks. We set up an 
                          experiment using the VGG-16 network trained on the CIFAR-10 
                          dataset and compared networks (pruned at different compression 
                          levels) obtained by weight rewinding and learning rate rewinding 
                          methods, under local and global pruning regimes. We use the 
                          unpruned network as a baseline and also compare the resulting pruned 
                         networks with their versions trained with randomly initialized 
                         weights. Overall, local pruning failed to find winning tickets for 
                         both rewinding methods. When using global pruning, weight 
                         rewinding produced a few winning tickets (limited to low pruning 
                          levels only) and performed nearly the same as, or worse than, 
                          random initialization. Learning rate rewinding, under global 
                          pruning, produced the best results, since it found winning 
                          tickets at most pruning levels and outperformed the baseline.",
  conference-location = "Porto de Galinhas (virtual)",
      conference-year = "7-10 Nov. 2020",
                  doi = "10.1109/SIBGRAPI51738.2020.00044",
                  url = "http://dx.doi.org/10.1109/SIBGRAPI51738.2020.00044",
             language = "en",
                  ibi = "8JMKD3MGPEW34M/43BCFTS",
                  url = "http://urlib.net/ibi/8JMKD3MGPEW34M/43BCFTS",
           targetfile = "133.pdf",
        urlaccessdate = "2024, May 06"
}
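
Note: the sketch below is an illustrative reading of the abstract, not the authors' code. It shows, in PyTorch (an assumption; the framework is not stated in this record), one round of local structured (filter-level) pruning with torch.nn.utils.prune.ln_structured followed by the two retraining variants compared in the paper: weight rewinding and learning rate rewinding. The model instantiation, epoch budget, pruning amount, and the stubbed training loop are placeholders.

import copy

import torch.nn as nn
import torch.nn.utils.prune as prune
from torchvision.models import vgg16

model = vgg16(num_classes=10)                    # VGG-16 with a 10-class head (placeholder for CIFAR-10)
init_state = copy.deepcopy(model.state_dict())   # snapshot at initialization
                                                 # (or at an early epoch, for variant (i))

def train(model, epochs=160):
    # Placeholder for the usual training loop: SGD with the original learning
    # rate schedule, run from the start of the schedule each time it is called.
    pass

def local_structured_prune(model, amount=0.2):
    # Local structured pruning: in every conv layer, remove `amount` of the
    # filters (dim=0) with the smallest L1 norm. Global structured pruning,
    # also evaluated in the paper, would instead rank filters across all
    # layers under a single criterion.
    for module in model.modules():
        if isinstance(module, nn.Conv2d):
            prune.ln_structured(module, name="weight", amount=amount, n=1, dim=0)

def rewind_weights(model, saved_state):
    # Weight rewinding: copy the saved values back into every layer. Pruned
    # layers expose the underlying tensor as `weight_orig`; the pruning mask
    # registered by ln_structured is left untouched.
    for name, module in model.named_modules():
        if hasattr(module, "weight_orig"):
            module.weight_orig.data.copy_(saved_state[name + ".weight"])
        elif hasattr(module, "weight") and isinstance(module.weight, nn.Parameter):
            module.weight.data.copy_(saved_state[name + ".weight"])
        if getattr(module, "bias", None) is not None:
            module.bias.data.copy_(saved_state[name + ".bias"])

# Train the dense network to completion, then apply one structured pruning round.
train(model)
local_structured_prune(model, amount=0.2)

# Variant (i), weight rewinding: reset the surviving weights to the saved
# snapshot, then retrain.
rewind_weights(model, init_state)
train(model)

# Variant (ii), learning rate rewinding: skip rewind_weights() above and keep
# the final trained weights; calling train() again restarts only the learning
# rate schedule.

The rewind step writes into weight_orig so the mask created by ln_structured is preserved; which variant yields winning tickets at which compression levels is the empirical question the paper addresses.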

